Python 3.11.0 | packaged by conda-forge | (main, Jan 16 2023, 14:12:30) [MSC v.1916 64 bit (AMD64)]
Type 'copyright', 'credits' or 'license' for more information
IPython 8.12.2 -- An enhanced Interactive Python. Type '?' for help.

In [ ]:
MEASURE_FOLDER = os.path.join(EXP_FOLDER, 'CATrunOct4')
df = get_data_frame(MEASURE_FOLDER)
df.drop(columns='betaPAErr', inplace=True)
df.dropna(inplace=True)

dfc = df.copy()
df_grouped = df.groupby(by=['pump_reference', 'pump_AOM_freq'])
groups = dict(list(df_grouped))
dfs = [df for df in groups.values()]

max_freqs = [384394.7]*len(dfs)
zipped_data = list(zip(dfs, max_freqs))
num1 = 3
fig1, ax1s = plt.subplots(num1)    # detuning
fig1b, ax1sb = plt.subplots(num1)
fig1size = (8, num1*6)
fig1bsize= fig1size
fig1b.set_size_inches(fig1bsize)

num2 = 2
fig2, ax2s = plt.subplots(num2) # pump_reference
fig2b, ax2sb = plt.subplots(num2)
fig2size = (8, num2*6)
fig2bsize= fig2size
fig2b.set_size_inches(fig2bsize)

for i, (df, max_freq)  in enumerate(zipped_data[:]):
    j1 = i%num1
    j2 = i//num1
    
    data = df.dropna()
    data = data[data['ratio'] < 1.3]
    freqs = ((max_freq-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-0.0)*FREQVSCURR)
    
    pref = data['pump_reference'].mean()
    detuning = 180-2*data['pump_AOM_freq'].mean()
    print(pref, detuning)
    
    
    ax1s[j1] = plot_spline_fit(ax1s[j1], 
                            x=freqs, y=data['ratio'], yerr=data['ratioErr']
                            ,scolor=f'C{j2}', mfc=f'C{j2}',color=f'C{j2}'
                            ,s=0.0, ms=5,linewidth=1.5, figsize=fig1size
                            ,label=f"Pump Amplitude = {pref:.2f}", fig=fig1)
    
    ax1s[j1].set_title(f"Detuning  = {detuning:.2f}", **titledict)
    ax1s[j1].legend()
    
    ax2s[j2] = plot_spline_fit(ax2s[j2], 
                            x=freqs, y=data['ratio'], yerr=data['ratioErr'],
                            scolor=f'C{j1}', mfc=f'C{j1}',color=f'C{j1}', 
                            s=0.0, ms=5, linewidth=1.5, figsize=fig2size,
                            label=f"Detuning  = {detuning:.2f}", fig=fig2)
    
    ax2s[j2].set_title(f"Pump Amplitude = { pref:.2f}", **titledict)
    ax2s[j2].legend()
    
    
    betaPAs = [a for a,b in sorted(zip(data['betaPA'], freqs), key=lambda pair:pair[1])]
    freqs = sorted(freqs)
    
    ax1sb[j1].plot(freqs, betaPAs, 'o-', color=f'C{j2}', ms=5,  label=f"Pump Amplitude = {pref:.2f}")
    ax1sb[j1].set_xlabel(r'$\Delta$ (GHz)')
    ax1sb[j1].set_ylabel(r'$\beta_{\mathrm{eff}}$ ')
    ax1sb[j1].legend()
    ax1sb[j1].set_title(f"Detuning  = {detuning:.2f}", **titledict)
    
    ax2sb[j2].plot(freqs, betaPAs, 'o-',color=f'C{j1}', ms=5, label=f"Detuning  = {detuning:.2f}")
    ax2sb[j2].set_xlabel(r'$\Delta$ (GHz)')
    ax2sb[j2].set_ylabel(r'$\beta_{\mathrm{eff}}$ ')
    ax2sb[j2].legend()
    ax2sb[j2].set_title(f"Pump Amplitude = { pref:.2f}", **titledict)
    
fig1.tight_layout()
fig1b.tight_layout()
fig2.tight_layout()
fig2b.tight_layout()

fig1.savefig(os.path.join(MEASURE_FOLDER, 'lossFeaturesDet.png'))
plt.show()
plt.close()
fig1b.savefig(os.path.join(MEASURE_FOLDER, '2bodyDet.png'))
plt.show()
plt.close()

fig2.savefig(os.path.join(MEASURE_FOLDER, 'lossFeaturesPampl.png'))
plt.show()
plt.close()
fig2b.savefig(os.path.join(MEASURE_FOLDER, '2BodyPampl.png'))
plt.show()
plt.close()

SNRdata = df_grouped['ratio'].max() - df_grouped['ratio'].min()
SNRdf = SNRdata.reset_index()
SNRdf.columns = ['pump_reference', 'pump_AOM_freq', 'SNR']
pivot_table = SNRdf.pivot('pump_reference', 'pump_AOM_freq', 'SNR')
xticklabels = [f'{180-2*x:.2f}' for x in pivot_table.columns]
yticklabels = [f'{y:.2f}' for y in pivot_table.index]
sns.heatmap(pivot_table, annot=True, fmt='.2f', xticklabels=xticklabels, yticklabels=yticklabels)
plt.xlabel("Detuning (MHz)")
plt.ylabel("Pump Reference")
plt.grid()
plt.savefig(os.path.join(MEASURE_FOLDER, 'heatmap.png'))
plt.show()
plt.close()
100%|██████████| 374/374 [00:03<00:00, 106.00it/s]
1.0 12.0
1.0 10.0
1.0 8.0
1.8499999999999994 12.0
1.8499999999999994 10.0
1.8499999999999994 8.0
<ipython-input-13-925cc9444756>:96: FutureWarning: In a future version of pandas all arguments of DataFrame.pivot will be keyword-only.
  pivot_table = SNRdf.pivot('pump_reference', 'pump_AOM_freq', 'SNR')
In [ ]:
Hdet = np.array([90, 78, 55])
Ldet = np.array([75, 61, 48.5])

HdetReif = np.array([1.0, 0.86, 0.69])
ldetReif = np.array([0.81, 0.73, 0.61])

print(Hdet/90)
print(Ldet/90)
[1.         0.86666667 0.61111111]
[0.83333333 0.67777778 0.53888889]
In [ ]:
plt.plot([0.54, 0.62, 0.68, 0.83, 0.867, 1], [0.61, 0.69, 0.73, 0.81, 0.86, 1.0])
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02bbe7d90>]
In [ ]:
x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

xerrs = np.ones(len(x))*1.2/42
yerrs = np.ones(len(y))*1.2/42

plt.errorbar(x,y, xerr=xerrs, yerr=yerrs, ecolor='k', fmt='o', capthick=2)
Out[ ]:
<ErrorbarContainer object of 3 artists>
In [ ]:
import lmfit
import numpy as np

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 6
    # variables        = 2
    chi-square         = 0.00195385
    reduced chi-square = 4.8846e-04
    Akaike info crit   = -44.1782811
    Bayesian info crit = -44.5947622
    R-squared          = 0.97946199
[[Variables]]
    m:  0.79276950 +/- 0.05739873 (7.24%) (init = 1)
    b:  0.18386746 +/- 0.04433093 (24.11%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9791
Fitted Parameters:
m = 0.7927694996949082
b = 0.18386746331587753
In [ ]:
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02b08c650>]
In [ ]:
import lmfit
import numpy as np

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x[1:]
y = y[1:]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 5
    # variables        = 2
    chi-square         = 0.00194641
    reduced chi-square = 6.4880e-04
    Akaike info crit   = -35.2560268
    Bayesian info crit = -36.0371510
    R-squared          = 0.96705463
[[Variables]]
    m:  0.78724595 +/- 0.08389218 (10.66%) (init = 1)
    b:  0.18867559 +/- 0.06802398 (36.05%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9859
Fitted Parameters:
m = 0.7872459517217716
b = 0.18867558619361577
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02e690790>]
In [ ]:
import lmfit
import numpy as np

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x[2:]
y = y[2:]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 3
    # variables        = 2
    chi-square         = 4.5058e-05
    reduced chi-square = 4.5058e-05
    Akaike info crit   = -29.3185052
    Bayesian info crit = -31.1212806
    R-squared          = 0.99767741
[[Variables]]
    m:  1.10033780 +/- 0.05309051 (4.82%) (init = 1)
    b: -0.09920368 +/- 0.04788545 (48.27%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9967
Fitted Parameters:
m = 1.1003377955777716
b = -0.09920367822511245
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02a1b9c10>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x[1:]
y = y[1:]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 5
    # variables        = 2
    chi-square         = 0.00194641
    reduced chi-square = 6.4880e-04
    Akaike info crit   = -35.2560268
    Bayesian info crit = -36.0371510
    R-squared          = 0.96705463
[[Variables]]
    m:  0.78724595 +/- 0.08389218 (10.66%) (init = 1)
    b:  0.18867559 +/- 0.06802398 (36.05%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9859
Fitted Parameters:
m = 0.7872459517217716
b = 0.18867558619361577
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02afaa0d0>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x[2:]
y = y[2:]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 4
    # variables        = 2
    chi-square         = 0.00155810
    reduced chi-square = 7.7905e-04
    Akaike info crit   = -27.4023381
    Bayesian info crit = -28.6297493
    R-squared          = 0.95963480
[[Variables]]
    m:  0.84435612 +/- 0.12245069 (14.50%) (init = 1)
    b:  0.13715234 +/- 0.10431672 (76.06%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9910
Fitted Parameters:
m = 0.8443561231133335
b = 0.1371523430628984
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02ab8d290>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [1, 3, 4, 5]]
y = y[y[i] for i in [1, 3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
  Cell In[28], line 17
    y = y[y[i] for i in [1, 3, 4, 5]]
               ^
SyntaxError: invalid syntax
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [1, 3, 4, 5]]
y = [y[i] for i in [1, 3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 4
    # variables        = 2
    chi-square         = 0.00189068
    reduced chi-square = 9.4534e-04
    Akaike info crit   = -26.6284488
    Bayesian info crit = -27.8558601
    R-squared          = 0.96172709
[[Variables]]
    m:  0.79928194 +/- 0.11274695 (14.11%) (init = 1)
    b:  0.17719545 +/- 0.09475087 (53.47%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9867
Fitted Parameters:
m = 0.7992819371542494
b = 0.17719545361483863
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02e699550>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [1, 2, 4, 5]]
y = [y[i] for i in [1, 2, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 4
    # variables        = 2
    chi-square         = 6.4267e-04
    reduced chi-square = 3.2134e-04
    Akaike info crit   = -30.9446863
    Bayesian info crit = -32.1720976
    R-squared          = 0.98910727
[[Variables]]
    m:  0.80073173 +/- 0.05941810 (7.42%) (init = 1)
    b:  0.18602065 +/- 0.04789048 (25.74%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9823
Fitted Parameters:
m = 0.8007317334382611
b = 0.1860206500519251
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02e7bc150>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [2, 3, 4, 5]]
y = [y[i] for i in [2, 3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 4
    # variables        = 2
    chi-square         = 0.00155810
    reduced chi-square = 7.7905e-04
    Akaike info crit   = -27.4023381
    Bayesian info crit = -28.6297493
    R-squared          = 0.95963480
[[Variables]]
    m:  0.84435612 +/- 0.12245069 (14.50%) (init = 1)
    b:  0.13715234 +/- 0.10431672 (76.06%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9910
Fitted Parameters:
m = 0.8443561231133335
b = 0.1371523430628984
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f030140a90>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.62, 0.68, 0.83, 0.867, 1]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [ 3, 4, 5]]
y = [y[i] for i in [ 3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 3
    # variables        = 2
    chi-square         = 4.5058e-05
    reduced chi-square = 4.5058e-05
    Akaike info crit   = -29.3185052
    Bayesian info crit = -31.1212806
    R-squared          = 0.99767741
[[Variables]]
    m:  1.10033780 +/- 0.05309051 (4.82%) (init = 1)
    b: -0.09920368 +/- 0.04788545 (48.27%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9967
Fitted Parameters:
m = 1.1003377955777716
b = -0.09920367822511245
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f030261c50>]
In [ ]:
import lmfit
import numpy as np

x = [0.56, 0.65, 0.7, 0.84, 0.867, 1.]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [ 3, 4, 5]]
y = [y[i] for i in [ 3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 3
    # variables        = 2
    chi-square         = 1.8713e-04
    reduced chi-square = 1.8713e-04
    Akaike info crit   = -25.0470264
    Bayesian info crit = -26.8498019
    R-squared          = 0.99035434
[[Variables]]
    m:  1.14430460 +/- 0.11293075 (9.87%) (init = 1)
    b: -0.14254419 +/- 0.10220678 (71.70%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9970
Fitted Parameters:
m = 1.1443046026668147
b = -0.1425441864730224
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f03029c150>]
In [ ]:
import lmfit
import numpy as np

x = [0.56, 0.65, 0.7, 0.84, 0.867, 1.]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [ 1, 2, 3, 4, 5]]
y = [y[i] for i in [ 1,2 ,3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 5
    # variables        = 2
    chi-square         = 0.00166592
    reduced chi-square = 5.5531e-04
    Akaike info crit   = -36.0340696
    Bayesian info crit = -36.8151937
    R-squared          = 0.97180225
[[Variables]]
    m:  0.85828441 +/- 0.08440902 (9.83%) (init = 1)
    b:  0.12158803 +/- 0.06929553 (56.99%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9884
Fitted Parameters:
m = 0.858284406302939
b = 0.12158803272680245
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f02de89550>]
In [ ]:
import lmfit
import numpy as np

x = [0.56, 0.65, 0.7, 0.84, 0.867, 1.]
y = [0.61, 0.69, 0.73, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [0, 1, 2, 3, 4, 5]]
y = [y[i] for i in [0, 1,2 ,3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 6
    # variables        = 2
    chi-square         = 0.00169597
    reduced chi-square = 4.2399e-04
    Akaike info crit   = -45.0275721
    Bayesian info crit = -45.4440531
    R-squared          = 0.98217274
[[Variables]]
    m:  0.84581666 +/- 0.05697634 (6.74%) (init = 1)
    b:  0.13247741 +/- 0.04464191 (33.70%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9821
Fitted Parameters:
m = 0.8458166644331041
b = 0.13247741005205974
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f030f15ed0>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.63, 0.68, 0.83 0.867, 1.]
y = [0.59, 0.65, 0.70, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = [x[i] for i in [0, 1, 2, 3, 4, 5]]
y = [y[i] for i in [0, 1,2 ,3, 4, 5]]
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
  Cell In[36], line 4
    x = [0.54, 0.63, 0.68, 0.83 0.867, 1.]
                           ^
SyntaxError: invalid syntax. Perhaps you forgot a comma?
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.63, 0.68, 0.83 0.867, 1.]
y = [0.59, 0.65, 0.70, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x
y = y
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
  Cell In[37], line 4
    x = [0.54, 0.63, 0.68, 0.83 0.867, 1.]
                           ^
SyntaxError: invalid syntax. Perhaps you forgot a comma?
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.63, 0.68, 0.83,  0.867, 1.]
y = [0.59, 0.65, 0.70, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x
y = y
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 6
    # variables        = 2
    chi-square         = 0.00105297
    reduced chi-square = 2.6324e-04
    Akaike info crit   = -47.8874079
    Bayesian info crit = -48.3038890
    R-squared          = 0.99078633
[[Variables]]
    m:  0.88179919 +/- 0.04251733 (4.82%) (init = 1)
    b:  0.10007651 +/- 0.03289483 (32.87%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9795
Fitted Parameters:
m = 0.8817991931076642
b = 0.10007651149085248
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f032595ed0>]
In [ ]:
import lmfit
import numpy as np

x = [0.54, 0.63, 0.68, 0.83,  0.867, 1.]
y = [0.55, 0.65, 0.70, 0.81, 0.86, 1.0]

# Define the linear model function
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x
y = y
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 6
    # variables        = 2
    chi-square         = 7.6478e-04
    reduced chi-square = 1.9119e-04
    Akaike info crit   = -49.8060981
    Bayesian info crit = -50.2225792
    R-squared          = 0.99411180
[[Variables]]
    m:  0.94163495 +/- 0.03623483 (3.85%) (init = 1)
    b:  0.04806431 +/- 0.02803418 (58.33%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9795
Fitted Parameters:
m = 0.9416349537885911
b = 0.048064310853879376
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f03264c150>]
In [ ]:
MEASURE_FOLDER = os.path.join(EXP_FOLDER, 'PArunOct5')
df = get_data_frame(MEASURE_FOLDER)
df.drop(columns=['betaPAErr'], inplace=True)
df.dropna(inplace=True)
#freqs = plot_results(df, 384201., save_folder=MEASURE_FOLDER)

max_freq = 384182.27
data = df

freqs = ((max_freq-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-df['currV'].min())*FREQVSCURR)
fig, ax = plt.subplots()
plot_spline_fit(ax=ax, x=freqs, y=data['ratio'], yerr=data['ratioErr'], 
                s=0.0, save_folder=MEASURE_FOLDER, 
                mfc='red', color='black', 
                title='')
plt.show()
plt.close()

betaPAs = [a for a,b in sorted(zip(data['betaPA'], freqs), key=lambda pair:pair[1])]
freqs = sorted(freqs)
plt.plot(freqs, betaPAs, 'o-', ms=5, label="")
plt.legend()
plt.xlabel(r'$\Delta$ (GHz)')
plt.ylabel(r'$\beta_{\mathrm{eff}}$ ')
#plt.savefig(join(MEASURE_FOLDER, 'betaVsFreq.png'), dpi=200) 
plt.title(f"2-body Decay Plot {''} ", **titledict) 
plt.show()
plt.close()
100%|██████████| 22/22 [01:16<00:00,  3.46s/it]
No artists with labels found to put in legend.  Note that artists whose label start with an underscore are ignored when legend() is called with no argument.
In [ ]:
df[df['ratio']==df['ratio'].min()]
Out[ ]:
sampleRate extraTime timeHold timeBaseline timeTest timeLoad timeF1 offset baseVolt BaseVoltErr ... precut_t filtertime master_clear tempV currV cat_AOM_freq cat_AOM_ampl cat_deload_t MOT_reload_t timestamp
16 2000.0 0.0 0.0 1.0 1.0 45.0 0.0 1.048 0.149145 0.000039 ... 0.5 0.275 False 0.415224 2.45 90.0 1.0 15.0 5.0 1900-01-01 14:22:43

1 rows × 118 columns

In [ ]:
df['ratio'].min()
Out[ ]:
0.610820747876365
In [ ]:
228.6 - 47.5
Out[ ]:
181.1
In [ ]:
MEASURE_FOLDER = os.path.join(EXP_FOLDER, 'largerCATamplRunSept30')
df = get_data_frame(MEASURE_FOLDER)
df.drop(columns=['betaPAErr'], inplace=True)
df.dropna(inplace=True)
#freqs = plot_results(df, 384201., save_folder=MEASURE_FOLDER)

max_freq = 384182.27
data = df

freqs = ((max_freq-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-df['currV'].min())*FREQVSCURR)
fig, ax = plt.subplots()
plot_spline_fit(ax=ax, x=freqs, y=data['ratio'], yerr=data['ratioErr'], 
                s=0.0, save_folder=MEASURE_FOLDER, 
                mfc='red', color='black', 
                title='')
plt.show()
plt.close()

betaPAs = [a for a,b in sorted(zip(data['betaPA'], freqs), key=lambda pair:pair[1])]
freqs = sorted(freqs)
plt.plot(freqs, betaPAs, 'o-', ms=5, label="")
plt.legend()
plt.xlabel(r'$\Delta$ (GHz)')
plt.ylabel(r'$\beta_{\mathrm{eff}}$ ')
plt.savefig(join(MEASURE_FOLDER, 'betaVsFreq.png'), dpi=200) 
plt.title(f"2-body Decay Plot {''} ", **titledict) 
plt.show()
plt.close()
100%|██████████| 107/107 [07:05<00:00,  3.98s/it]
No artists with labels found to put in legend.  Note that artists whose label start with an underscore are ignored when legend() is called with no argument.
In [ ]:
MEASURE_FOLDER = os.path.join(EXP_FOLDER, 'largerCATamplRunSept30')
df = get_data_frame(MEASURE_FOLDER,
                    plot=False,
                    cache_all=True)
df.drop(columns=['betaPAErr'])
df.dropna(inplace=True)
df = df[df['ratio']<2.0]

groupbyKey = 'cat_AOM_ampl'
titleKey = 'pump_AOM_freq'

df_grouped = df.groupby(by=groupbyKey)
min_ratios = df_grouped['ratio'].min()

groups = dict(list(df_grouped))
dfs = [df for df in groups.values()]

# plotting ratio vs freq
max_freqs = [384257.]*len(dfs)
zipped_data = list(zip(dfs, max_freqs))
fig, ax = plt.subplots()
for i, (df, max_freq)  in enumerate(zipped_data[:]):
    data = df
    groupKey =  data[groupbyKey].mean() 
    freqs = ((max_freq-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-data['currV'].min())*FREQVSCURR)
    ax=plot_spline_fit(ax, x=freqs, y=data['ratio'], yerr=data['ratioErr'],scolor=f'C{i}', mfc=f'C{i}',color=f'C{i}', s=0.0, ms=5, figsize=(10, 10), label=f"{groupbyKey} = {groupKey:.2f}", linewidth=2.5)

plt.legend()
plt.savefig(os.path.join(MEASURE_FOLDER, 'lossFeatures.png'))
plt.title(f'Loss Features, {titleKey} = {data[titleKey].mean():.2f}', **titledict)
plt.show()
plt.close()
#---------------------------------------------------
# x = [df[groupbyKey].mean() for df in dfs]
# y = [(df['ratio'].max() - df['ratio'].min()) for df in dfs]
# plt.plot( x, y ,'-o')
# plt.xlabel(groupbyKey)
# plt.ylabel(r'SNR $ = V_{ss, off} - V_{ss, on}$ ')
# plt.title(f'SNR Plot, {titleKey} = {df[titleKey].mean():.2f}', **titledict)
# plt.show()
# plt.savefig(join(MEASURE_FOLDER, 'SNRplot.png'), dpi=200)
# plt.close()

#---------------------------------------------------

for i, df  in enumerate(dfs[:]):
    data = df
    groupKey =  data[groupbyKey].mean() 
    freqs = ((max_freqs[0]-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-data['currV'].min())*FREQVSCURR)
    betaPAs = [a for a,b in sorted(zip(data['betaPA'], freqs), key=lambda pair:pair[1])]
    freqs = sorted(freqs)
    plt.plot(freqs, betaPAs, 'o-', ms=5, label=f"{groupbyKey}={groupKey:.2f}")
plt.legend()
plt.xlabel(r'$\Delta$ (GHz)')
plt.ylabel(r'$\beta_{\mathrm{eff}}$ ')
plt.savefig(join(MEASURE_FOLDER, 'betaVsFreq.png'), dpi=200) 
plt.title(f'2-body Decay Plot, {titleKey} = {df[titleKey].mean():.2f}', **titledict) 
plt.show()
plt.close()
100%|██████████| 109/109 [00:07<00:00, 15.36it/s]
In [ ]:
MEASURE_FOLDER = os.path.join(EXP_FOLDER, 'largerCATamplRunSept30')
df = get_data_frame(MEASURE_FOLDER,
                    plot=False,
                    cache_all=True)
df.drop(columns=['betaPAErr'], inplace=True)
df.dropna(inplace=True)
df = df[df['ratio']<2.0]

groupbyKey = 'cat_AOM_ampl'
titleKey = 'pump_AOM_freq'

df_grouped = df.groupby(by=groupbyKey)
min_ratios = df_grouped['ratio'].min()

groups = dict(list(df_grouped))
dfs = [df for df in groups.values()]

# plotting ratio vs freq
max_freqs = [384257.]*len(dfs)
zipped_data = list(zip(dfs, max_freqs))
fig, ax = plt.subplots()
for i, (df, max_freq)  in enumerate(zipped_data[:]):
    data = df
    groupKey =  data[groupbyKey].mean() 
    freqs = ((max_freq-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-data['currV'].min())*FREQVSCURR)
    ax=plot_spline_fit(ax, x=freqs, y=data['ratio'], yerr=data['ratioErr'],scolor=f'C{i}', mfc=f'C{i}',color=f'C{i}', s=0.0, ms=5, figsize=(10, 10), label=f"{groupbyKey} = {groupKey:.2f}", linewidth=2.5)

plt.legend()
plt.savefig(os.path.join(MEASURE_FOLDER, 'lossFeatures.png'))
plt.title(f'Loss Features, {titleKey} = {data[titleKey].mean():.2f}', **titledict)
plt.show()
plt.close()
#---------------------------------------------------
# x = [df[groupbyKey].mean() for df in dfs]
# y = [(df['ratio'].max() - df['ratio'].min()) for df in dfs]
# plt.plot( x, y ,'-o')
# plt.xlabel(groupbyKey)
# plt.ylabel(r'SNR $ = V_{ss, off} - V_{ss, on}$ ')
# plt.title(f'SNR Plot, {titleKey} = {df[titleKey].mean():.2f}', **titledict)
# plt.show()
# plt.savefig(join(MEASURE_FOLDER, 'SNRplot.png'), dpi=200)
# plt.close()

#---------------------------------------------------

for i, df  in enumerate(dfs[:]):
    data = df
    groupKey =  data[groupbyKey].mean() 
    freqs = ((max_freqs[0]-PUMP_FREQUENCY)-(data['tempV']-data['tempV'].min())*FREQVSVOLT- (data['currV']-data['currV'].min())*FREQVSCURR)
    betaPAs = [a for a,b in sorted(zip(data['betaPA'], freqs), key=lambda pair:pair[1])]
    freqs = sorted(freqs)
    plt.plot(freqs, betaPAs, 'o-', ms=5, label=f"{groupbyKey}={groupKey:.2f}")
plt.legend()
plt.xlabel(r'$\Delta$ (GHz)')
plt.ylabel(r'$\beta_{\mathrm{eff}}$ ')
plt.savefig(join(MEASURE_FOLDER, 'betaVsFreq.png'), dpi=200) 
plt.title(f'2-body Decay Plot, {titleKey} = {df[titleKey].mean():.2f}', **titledict) 
plt.show()
plt.close()
100%|██████████| 110/110 [00:04<00:00, 22.13it/s]
In [ ]:
Hdet = np.array([90, 78, 55])
Ldet = np.array([75, 61, 48.5])

HdetReif = np.array([1.0, 0.86, 0.67])
ldetReif = np.array([0.81, 0.72, 0.54])

print(Hdet/90)
print(Ldet/90)
[1.         0.86666667 0.61111111]
[0.83333333 0.67777778 0.53888889]
In [ ]:
def linear(x, m, b):
    return m * x + b

# Create an lmfit model using the linear function
linear_model = lmfit.Model(linear)
params = linear_model.make_params(m=1, b=0)

# Fit the model to the data
x = x
y = y
result = linear_model.fit(y, params, x=x)

print(result.fit_report())
print("Fitted Parameters:")
print("m =", result.params['m'].value)
print("b =", result.params['b'].value)
plt.plot(x, result.best_fit)
plt.plot(x, y, 'o')
[[Model]]
    Model(linear)
[[Fit Statistics]]
    # fitting method   = leastsq
    # function evals   = 7
    # data points      = 6
    # variables        = 2
    chi-square         = 7.6478e-04
    reduced chi-square = 1.9119e-04
    Akaike info crit   = -49.8060981
    Bayesian info crit = -50.2225792
    R-squared          = 0.99411180
[[Variables]]
    m:  0.94163495 +/- 0.03623483 (3.85%) (init = 1)
    b:  0.04806431 +/- 0.02803418 (58.33%) (init = 0)
[[Correlations]] (unreported correlations are < 0.100)
    C(m, b) = -0.9795
Fitted Parameters:
m = 0.9416349537885911
b = 0.048064310853879376
Out[ ]:
[<matplotlib.lines.Line2D at 0x1f03bc2cc10>]